In [1]:
# from google.colab import drive
# drive.mount('/content/drive')
# folderPath = "/content/drive/Othercomputers/z590/Colab Notebooks/NN Project"
folderPath = "."

Imports

In [2]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
from sklearn import preprocessing
from sklearn.preprocessing import OneHotEncoder
In [3]:
import tensorflow as tf
tf.__version__
Out[3]:
'2.10.0'
In [4]:
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Flatten, Dense
from tensorflow.keras import regularizers, optimizers
In [5]:
import random
random.seed(0)

Part A¶

A.1.

In [6]:
signal_df = pd.read_csv(folderPath + "/NN Project Data - Signal.csv")
In [7]:
signal_df.shape
Out[7]:
(1599, 12)
In [8]:
signal_df.info()
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 1599 entries, 0 to 1598
Data columns (total 12 columns):
 #   Column           Non-Null Count  Dtype  
---  ------           --------------  -----  
 0   Parameter 1      1599 non-null   float64
 1   Parameter 2      1599 non-null   float64
 2   Parameter 3      1599 non-null   float64
 3   Parameter 4      1599 non-null   float64
 4   Parameter 5      1599 non-null   float64
 5   Parameter 6      1599 non-null   float64
 6   Parameter 7      1599 non-null   float64
 7   Parameter 8      1599 non-null   float64
 8   Parameter 9      1599 non-null   float64
 9   Parameter 10     1599 non-null   float64
 10  Parameter 11     1599 non-null   float64
 11  Signal_Strength  1599 non-null   int64  
dtypes: float64(11), int64(1)
memory usage: 150.0 KB

1.B.

In [9]:
percent_missing = (signal_df.isnull().sum() * 100)/ len(signal_df)
print(percent_missing)
Parameter 1        0.0
Parameter 2        0.0
Parameter 3        0.0
Parameter 4        0.0
Parameter 5        0.0
Parameter 6        0.0
Parameter 7        0.0
Parameter 8        0.0
Parameter 9        0.0
Parameter 10       0.0
Parameter 11       0.0
Signal_Strength    0.0
dtype: float64

There are no null values in the data

1.C.

In [10]:
signal_df.duplicated()
Out[10]:
0       False
1       False
2       False
3       False
4        True
        ...  
1594    False
1595    False
1596     True
1597    False
1598    False
Length: 1599, dtype: bool
In [11]:
signal_df.duplicated().sum()
Out[11]:
240

We can see there are 240 duplicates.
Dropping duplicates

In [12]:
signal_data = signal_df.drop_duplicates()
In [13]:
signal_data.shape
Out[13]:
(1359, 12)

1.D. & 1.E.

In [14]:
signal_columns = signal_df.columns
signal_columns
Out[14]:
Index(['Parameter 1', 'Parameter 2', 'Parameter 3', 'Parameter 4',
       'Parameter 5', 'Parameter 6', 'Parameter 7', 'Parameter 8',
       'Parameter 9', 'Parameter 10', 'Parameter 11', 'Signal_Strength'],
      dtype='object')

Drawing box plot and distplot for all parameters and Target variable.

In [15]:
for col in signal_columns:
    sns.boxplot(signal_data[col])
    sns.displot(signal_data[col])
    plt.show()
    print('Distribution of ',col)
    print('Mean is:',signal_data[col].mean())
    print('Median is:',signal_data[col].median())
    print('Mode is:',signal_data[col].mode())
    print('Standard deviation is:',signal_data[col].std())
    print('Skewness is:',signal_data[col].skew())
    print('Maximum is:',signal_data[col].max())
    print('Minimum is:',signal_data[col].min())
c:\ProgramData\Anaconda3\lib\site-packages\seaborn\_decorators.py:36: FutureWarning: Pass the following variable as a keyword arg: x. From version 0.12, the only valid positional argument will be `data`, and passing other arguments without an explicit keyword will result in an error or misinterpretation.
  warnings.warn(
Distribution of  Parameter 1
Mean is: 8.310596026490087
Median is: 7.9
Mode is: 0    7.2
Name: Parameter 1, dtype: float64
Standard deviation is: 1.736989807532466
Skewness is: 0.9410413664561449
Maximum is: 15.9
Minimum is: 4.6
c:\ProgramData\Anaconda3\lib\site-packages\seaborn\_decorators.py:36: FutureWarning: Pass the following variable as a keyword arg: x. From version 0.12, the only valid positional argument will be `data`, and passing other arguments without an explicit keyword will result in an error or misinterpretation.
  warnings.warn(
Distribution of  Parameter 2
Mean is: 0.5294775570272258
Median is: 0.52
Mode is: 0    0.5
Name: Parameter 2, dtype: float64
Standard deviation is: 0.18303131761907185
Skewness is: 0.7292789463991854
Maximum is: 1.58
Minimum is: 0.12
c:\ProgramData\Anaconda3\lib\site-packages\seaborn\_decorators.py:36: FutureWarning: Pass the following variable as a keyword arg: x. From version 0.12, the only valid positional argument will be `data`, and passing other arguments without an explicit keyword will result in an error or misinterpretation.
  warnings.warn(
c:\ProgramData\Anaconda3\lib\site-packages\seaborn\_decorators.py:36: FutureWarning: Pass the following variable as a keyword arg: x. From version 0.12, the only valid positional argument will be `data`, and passing other arguments without an explicit keyword will result in an error or misinterpretation.
  warnings.warn(
Distribution of  Parameter 3
Mean is: 0.27233259749815975
Median is: 0.26
Mode is: 0    0.0
Name: Parameter 3, dtype: float64
Standard deviation is: 0.1955365445504639
Skewness is: 0.31272554238899036
Maximum is: 1.0
Minimum is: 0.0
Distribution of  Parameter 4
Mean is: 2.523399558498897
Median is: 2.2
Mode is: 0    2.0
Name: Parameter 4, dtype: float64
Standard deviation is: 1.3523137577104198
Skewness is: 4.548153403940447
Maximum is: 15.5
Minimum is: 0.9
c:\ProgramData\Anaconda3\lib\site-packages\seaborn\_decorators.py:36: FutureWarning: Pass the following variable as a keyword arg: x. From version 0.12, the only valid positional argument will be `data`, and passing other arguments without an explicit keyword will result in an error or misinterpretation.
  warnings.warn(
Distribution of  Parameter 5
Mean is: 0.0881236203090504
Median is: 0.079
Mode is: 0    0.08
Name: Parameter 5, dtype: float64
Standard deviation is: 0.04937686244348626
Skewness is: 5.502487294623722
Maximum is: 0.611
Minimum is: 0.012
c:\ProgramData\Anaconda3\lib\site-packages\seaborn\_decorators.py:36: FutureWarning: Pass the following variable as a keyword arg: x. From version 0.12, the only valid positional argument will be `data`, and passing other arguments without an explicit keyword will result in an error or misinterpretation.
  warnings.warn(
Distribution of  Parameter 6
Mean is: 15.893303899926417
Median is: 14.0
Mode is: 0    6.0
Name: Parameter 6, dtype: float64
Standard deviation is: 10.447270259048695
Skewness is: 1.2265794991760643
Maximum is: 72.0
Minimum is: 1.0
c:\ProgramData\Anaconda3\lib\site-packages\seaborn\_decorators.py:36: FutureWarning: Pass the following variable as a keyword arg: x. From version 0.12, the only valid positional argument will be `data`, and passing other arguments without an explicit keyword will result in an error or misinterpretation.
  warnings.warn(
Distribution of  Parameter 7
Mean is: 46.82597498160412
Median is: 38.0
Mode is: 0    28.0
Name: Parameter 7, dtype: float64
Standard deviation is: 33.40894570661654
Skewness is: 1.5403680777213933
Maximum is: 289.0
Minimum is: 6.0
c:\ProgramData\Anaconda3\lib\site-packages\seaborn\_decorators.py:36: FutureWarning: Pass the following variable as a keyword arg: x. From version 0.12, the only valid positional argument will be `data`, and passing other arguments without an explicit keyword will result in an error or misinterpretation.
  warnings.warn(
Distribution of  Parameter 8
Mean is: 0.9967089477557026
Median is: 0.9967
Mode is: 0    0.9968
Name: Parameter 8, dtype: float64
Standard deviation is: 0.0018689171325591398
Skewness is: 0.04477785573116107
Maximum is: 1.00369
Minimum is: 0.99007
c:\ProgramData\Anaconda3\lib\site-packages\seaborn\_decorators.py:36: FutureWarning: Pass the following variable as a keyword arg: x. From version 0.12, the only valid positional argument will be `data`, and passing other arguments without an explicit keyword will result in an error or misinterpretation.
  warnings.warn(
Distribution of  Parameter 9
Mean is: 3.309786607799856
Median is: 3.31
Mode is: 0    3.3
Name: Parameter 9, dtype: float64
Standard deviation is: 0.15503631128729595
Skewness is: 0.2320322752014824
Maximum is: 4.01
Minimum is: 2.74
c:\ProgramData\Anaconda3\lib\site-packages\seaborn\_decorators.py:36: FutureWarning: Pass the following variable as a keyword arg: x. From version 0.12, the only valid positional argument will be `data`, and passing other arguments without an explicit keyword will result in an error or misinterpretation.
  warnings.warn(
Distribution of  Parameter 10
Mean is: 0.6587049300956593
Median is: 0.62
Mode is: 0    0.54
Name: Parameter 10, dtype: float64
Standard deviation is: 0.17066689057420695
Skewness is: 2.4065046145674196
Maximum is: 2.0
Minimum is: 0.33
c:\ProgramData\Anaconda3\lib\site-packages\seaborn\_decorators.py:36: FutureWarning: Pass the following variable as a keyword arg: x. From version 0.12, the only valid positional argument will be `data`, and passing other arguments without an explicit keyword will result in an error or misinterpretation.
  warnings.warn(
Distribution of  Parameter 11
Mean is: 10.432315428013228
Median is: 10.2
Mode is: 0    9.5
Name: Parameter 11, dtype: float64
Standard deviation is: 1.0820654499497833
Skewness is: 0.8598411692319623
Maximum is: 14.9
Minimum is: 8.4
c:\ProgramData\Anaconda3\lib\site-packages\seaborn\_decorators.py:36: FutureWarning: Pass the following variable as a keyword arg: x. From version 0.12, the only valid positional argument will be `data`, and passing other arguments without an explicit keyword will result in an error or misinterpretation.
  warnings.warn(
Distribution of  Signal_Strength
Mean is: 5.6232523914643116
Median is: 6.0
Mode is: 0    5
Name: Signal_Strength, dtype: int64
Standard deviation is: 0.8235780017165619
Skewness is: 0.19240658731658308
Maximum is: 8
Minimum is: 3
  1. Parameter 7 has maximum standard deviation
  2. Signal strength - 5 has the highest count followed by 6 and 7
  3. Parameter 4, 5 have has huge number of outliers
  4. Most of the Parameter are right skewed
In [16]:
sns.pairplot(signal_data)
plt.show()
  1. Parameter 6 and Parameter 7 are correlated
  2. Parameter 1 is positively correlated to Parameter 3 and Parameter 8
  3. Parameter 1 and parameter 9 are negatively correlated
In [17]:
b_corr = signal_data.corr()
plt.subplots(figsize = (12, 7)) 
sns.heatmap(b_corr, annot=True, cmap="YlGnBu_r")
Out[17]:
<AxesSubplot:>
  1. Parameter 2 is most negatively correlated with Signal Strength
  2. Parameter 11 is most positively correlated with Signal Strength
  3. None of the correlations are close to 0.8 to be considered highly correlated

2.A.

In [18]:
X = signal_data.drop("Signal_Strength", axis=1)
y = signal_data['Signal_Strength']
In [19]:
y_cat = tf.keras.utils.to_categorical(y)
y_cat[0]
Out[19]:
array([0., 0., 0., 0., 0., 1., 0., 0., 0.], dtype=float32)

2.B.

In [20]:
# splitting to create test data
x_train, x_test, y_train, y_test = train_test_split(X, y_cat, test_size=.30, random_state = 0)

2.C.

In [21]:
print(x_train.shape)
print(y_train.shape)

print(x_test.shape)
print(y_test.shape)
(951, 11)
(951, 9)
(408, 11)
(408, 9)

data are in sync with equal number of rows for x and y for both train and test datasets

2.D.

In [22]:
normalizer = preprocessing.Normalizer()

# Normalizing train data
normalized_train_X = normalizer.fit_transform(x_train)

# Normalizing test data
normalized_test_X = normalizer.transform(x_test)

3.A.

In [23]:
# Initialize Sequential model
model = Sequential()
In [24]:
# Adding a dense layer
model.add(Dense(11, activation='relu'))
model.add(Dense(9, activation='relu'))
model.add(Dense(9, activation='softmax'))
In [25]:
model.compile(loss="categorical_crossentropy", metrics=["accuracy"], optimizer="sgd")

3.B.

In [26]:
history = model.fit(x=normalized_train_X, y= y_train, epochs=100, validation_split=0.35)
Epoch 1/100
20/20 [==============================] - 0s 9ms/step - loss: 2.1808 - accuracy: 0.3123 - val_loss: 2.1522 - val_accuracy: 0.4024
Epoch 2/100
20/20 [==============================] - 0s 2ms/step - loss: 2.1254 - accuracy: 0.4175 - val_loss: 2.1054 - val_accuracy: 0.4054
Epoch 3/100
20/20 [==============================] - 0s 3ms/step - loss: 2.0772 - accuracy: 0.4110 - val_loss: 2.0622 - val_accuracy: 0.4054
Epoch 4/100
20/20 [==============================] - 0s 2ms/step - loss: 2.0321 - accuracy: 0.4369 - val_loss: 2.0222 - val_accuracy: 0.4114
Epoch 5/100
20/20 [==============================] - 0s 2ms/step - loss: 1.9903 - accuracy: 0.4175 - val_loss: 1.9837 - val_accuracy: 0.4114
Epoch 6/100
20/20 [==============================] - 0s 2ms/step - loss: 1.9496 - accuracy: 0.4401 - val_loss: 1.9473 - val_accuracy: 0.4174
Epoch 7/100
20/20 [==============================] - 0s 3ms/step - loss: 1.9113 - accuracy: 0.4385 - val_loss: 1.9130 - val_accuracy: 0.4204
Epoch 8/100
20/20 [==============================] - 0s 2ms/step - loss: 1.8751 - accuracy: 0.4498 - val_loss: 1.8804 - val_accuracy: 0.4384
Epoch 9/100
20/20 [==============================] - 0s 2ms/step - loss: 1.8406 - accuracy: 0.4288 - val_loss: 1.8499 - val_accuracy: 0.4384
Epoch 10/100
20/20 [==============================] - 0s 2ms/step - loss: 1.8082 - accuracy: 0.4401 - val_loss: 1.8213 - val_accuracy: 0.4414
Epoch 11/100
20/20 [==============================] - 0s 2ms/step - loss: 1.7778 - accuracy: 0.4417 - val_loss: 1.7944 - val_accuracy: 0.4414
Epoch 12/100
20/20 [==============================] - 0s 2ms/step - loss: 1.7493 - accuracy: 0.4434 - val_loss: 1.7688 - val_accuracy: 0.4444
Epoch 13/100
20/20 [==============================] - 0s 3ms/step - loss: 1.7217 - accuracy: 0.4450 - val_loss: 1.7449 - val_accuracy: 0.4354
Epoch 14/100
20/20 [==============================] - 0s 3ms/step - loss: 1.6960 - accuracy: 0.4450 - val_loss: 1.7225 - val_accuracy: 0.4384
Epoch 15/100
20/20 [==============================] - 0s 2ms/step - loss: 1.6721 - accuracy: 0.4466 - val_loss: 1.7014 - val_accuracy: 0.4354
Epoch 16/100
20/20 [==============================] - 0s 2ms/step - loss: 1.6495 - accuracy: 0.4353 - val_loss: 1.6815 - val_accuracy: 0.4324
Epoch 17/100
20/20 [==============================] - 0s 2ms/step - loss: 1.6281 - accuracy: 0.4304 - val_loss: 1.6628 - val_accuracy: 0.4354
Epoch 18/100
20/20 [==============================] - 0s 2ms/step - loss: 1.6076 - accuracy: 0.4369 - val_loss: 1.6449 - val_accuracy: 0.4204
Epoch 19/100
20/20 [==============================] - 0s 2ms/step - loss: 1.5885 - accuracy: 0.4256 - val_loss: 1.6278 - val_accuracy: 0.4114
Epoch 20/100
20/20 [==============================] - 0s 2ms/step - loss: 1.5702 - accuracy: 0.4272 - val_loss: 1.6115 - val_accuracy: 0.4144
Epoch 21/100
20/20 [==============================] - 0s 2ms/step - loss: 1.5522 - accuracy: 0.4239 - val_loss: 1.5954 - val_accuracy: 0.4204
Epoch 22/100
20/20 [==============================] - 0s 2ms/step - loss: 1.5342 - accuracy: 0.4239 - val_loss: 1.5784 - val_accuracy: 0.4294
Epoch 23/100
20/20 [==============================] - 0s 2ms/step - loss: 1.5140 - accuracy: 0.4256 - val_loss: 1.5591 - val_accuracy: 0.4354
Epoch 24/100
20/20 [==============================] - 0s 2ms/step - loss: 1.4917 - accuracy: 0.4288 - val_loss: 1.5400 - val_accuracy: 0.4324
Epoch 25/100
20/20 [==============================] - 0s 2ms/step - loss: 1.4709 - accuracy: 0.4288 - val_loss: 1.5230 - val_accuracy: 0.4294
Epoch 26/100
20/20 [==============================] - 0s 2ms/step - loss: 1.4519 - accuracy: 0.4272 - val_loss: 1.5072 - val_accuracy: 0.4294
Epoch 27/100
20/20 [==============================] - 0s 2ms/step - loss: 1.4343 - accuracy: 0.4256 - val_loss: 1.4923 - val_accuracy: 0.4294
Epoch 28/100
20/20 [==============================] - 0s 2ms/step - loss: 1.4174 - accuracy: 0.4256 - val_loss: 1.4789 - val_accuracy: 0.4294
Epoch 29/100
20/20 [==============================] - 0s 2ms/step - loss: 1.4020 - accuracy: 0.4256 - val_loss: 1.4668 - val_accuracy: 0.4354
Epoch 30/100
20/20 [==============================] - 0s 2ms/step - loss: 1.3875 - accuracy: 0.4272 - val_loss: 1.4555 - val_accuracy: 0.4354
Epoch 31/100
20/20 [==============================] - 0s 4ms/step - loss: 1.3742 - accuracy: 0.4320 - val_loss: 1.4448 - val_accuracy: 0.4354
Epoch 32/100
20/20 [==============================] - 0s 3ms/step - loss: 1.3617 - accuracy: 0.4304 - val_loss: 1.4351 - val_accuracy: 0.4354
Epoch 33/100
20/20 [==============================] - 0s 2ms/step - loss: 1.3502 - accuracy: 0.4304 - val_loss: 1.4260 - val_accuracy: 0.4384
Epoch 34/100
20/20 [==============================] - 0s 3ms/step - loss: 1.3390 - accuracy: 0.4288 - val_loss: 1.4179 - val_accuracy: 0.4384
Epoch 35/100
20/20 [==============================] - 0s 3ms/step - loss: 1.3289 - accuracy: 0.4239 - val_loss: 1.4103 - val_accuracy: 0.4384
Epoch 36/100
20/20 [==============================] - 0s 3ms/step - loss: 1.3197 - accuracy: 0.4256 - val_loss: 1.4036 - val_accuracy: 0.4384
Epoch 37/100
20/20 [==============================] - 0s 2ms/step - loss: 1.3112 - accuracy: 0.4272 - val_loss: 1.3975 - val_accuracy: 0.4414
Epoch 38/100
20/20 [==============================] - 0s 2ms/step - loss: 1.3034 - accuracy: 0.4304 - val_loss: 1.3913 - val_accuracy: 0.4414
Epoch 39/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2957 - accuracy: 0.4320 - val_loss: 1.3866 - val_accuracy: 0.4414
Epoch 40/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2890 - accuracy: 0.4288 - val_loss: 1.3818 - val_accuracy: 0.4264
Epoch 41/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2826 - accuracy: 0.4304 - val_loss: 1.3770 - val_accuracy: 0.4354
Epoch 42/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2765 - accuracy: 0.4256 - val_loss: 1.3731 - val_accuracy: 0.4324
Epoch 43/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2708 - accuracy: 0.4417 - val_loss: 1.3691 - val_accuracy: 0.4174
Epoch 44/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2654 - accuracy: 0.4353 - val_loss: 1.3657 - val_accuracy: 0.4234
Epoch 45/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2606 - accuracy: 0.4515 - val_loss: 1.3622 - val_accuracy: 0.4294
Epoch 46/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2559 - accuracy: 0.4401 - val_loss: 1.3590 - val_accuracy: 0.4414
Epoch 47/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2518 - accuracy: 0.4320 - val_loss: 1.3561 - val_accuracy: 0.4414
Epoch 48/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2480 - accuracy: 0.4450 - val_loss: 1.3536 - val_accuracy: 0.4294
Epoch 49/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2443 - accuracy: 0.4369 - val_loss: 1.3513 - val_accuracy: 0.4264
Epoch 50/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2408 - accuracy: 0.4385 - val_loss: 1.3488 - val_accuracy: 0.4294
Epoch 51/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2376 - accuracy: 0.4417 - val_loss: 1.3467 - val_accuracy: 0.4384
Epoch 52/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2345 - accuracy: 0.4401 - val_loss: 1.3450 - val_accuracy: 0.4384
Epoch 53/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2315 - accuracy: 0.4401 - val_loss: 1.3433 - val_accuracy: 0.4354
Epoch 54/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2288 - accuracy: 0.4450 - val_loss: 1.3415 - val_accuracy: 0.4234
Epoch 55/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2261 - accuracy: 0.4482 - val_loss: 1.3399 - val_accuracy: 0.4474
Epoch 56/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2237 - accuracy: 0.4693 - val_loss: 1.3379 - val_accuracy: 0.4204
Epoch 57/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2215 - accuracy: 0.4612 - val_loss: 1.3362 - val_accuracy: 0.4234
Epoch 58/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2190 - accuracy: 0.4595 - val_loss: 1.3340 - val_accuracy: 0.4294
Epoch 59/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2170 - accuracy: 0.4547 - val_loss: 1.3327 - val_accuracy: 0.4204
Epoch 60/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2148 - accuracy: 0.4595 - val_loss: 1.3316 - val_accuracy: 0.4444
Epoch 61/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2127 - accuracy: 0.4612 - val_loss: 1.3301 - val_accuracy: 0.4444
Epoch 62/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2109 - accuracy: 0.4693 - val_loss: 1.3291 - val_accuracy: 0.4505
Epoch 63/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2089 - accuracy: 0.4854 - val_loss: 1.3264 - val_accuracy: 0.4324
Epoch 64/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2071 - accuracy: 0.4660 - val_loss: 1.3246 - val_accuracy: 0.4264
Epoch 65/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2054 - accuracy: 0.4612 - val_loss: 1.3234 - val_accuracy: 0.4234
Epoch 66/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2038 - accuracy: 0.4466 - val_loss: 1.3224 - val_accuracy: 0.4444
Epoch 67/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2021 - accuracy: 0.4676 - val_loss: 1.3211 - val_accuracy: 0.4324
Epoch 68/100
20/20 [==============================] - 0s 2ms/step - loss: 1.2008 - accuracy: 0.4595 - val_loss: 1.3205 - val_accuracy: 0.4384
Epoch 69/100
20/20 [==============================] - 0s 2ms/step - loss: 1.1994 - accuracy: 0.4612 - val_loss: 1.3190 - val_accuracy: 0.4324
Epoch 70/100
20/20 [==============================] - 0s 2ms/step - loss: 1.1978 - accuracy: 0.4515 - val_loss: 1.3191 - val_accuracy: 0.4535
Epoch 71/100
20/20 [==============================] - 0s 2ms/step - loss: 1.1964 - accuracy: 0.4612 - val_loss: 1.3179 - val_accuracy: 0.4505
Epoch 72/100
20/20 [==============================] - 0s 2ms/step - loss: 1.1951 - accuracy: 0.4693 - val_loss: 1.3162 - val_accuracy: 0.4474
Epoch 73/100
20/20 [==============================] - 0s 3ms/step - loss: 1.1937 - accuracy: 0.4709 - val_loss: 1.3151 - val_accuracy: 0.4444
Epoch 74/100
20/20 [==============================] - 0s 3ms/step - loss: 1.1928 - accuracy: 0.4595 - val_loss: 1.3141 - val_accuracy: 0.4444
Epoch 75/100
20/20 [==============================] - 0s 3ms/step - loss: 1.1915 - accuracy: 0.4693 - val_loss: 1.3131 - val_accuracy: 0.4414
Epoch 76/100
20/20 [==============================] - 0s 2ms/step - loss: 1.1906 - accuracy: 0.4628 - val_loss: 1.3125 - val_accuracy: 0.4444
Epoch 77/100
20/20 [==============================] - 0s 3ms/step - loss: 1.1894 - accuracy: 0.4531 - val_loss: 1.3117 - val_accuracy: 0.4444
Epoch 78/100
20/20 [==============================] - 0s 3ms/step - loss: 1.1882 - accuracy: 0.4693 - val_loss: 1.3110 - val_accuracy: 0.4414
Epoch 79/100
20/20 [==============================] - 0s 3ms/step - loss: 1.1872 - accuracy: 0.4741 - val_loss: 1.3105 - val_accuracy: 0.4505
Epoch 80/100
20/20 [==============================] - 0s 3ms/step - loss: 1.1864 - accuracy: 0.4644 - val_loss: 1.3097 - val_accuracy: 0.4505
Epoch 81/100
20/20 [==============================] - 0s 3ms/step - loss: 1.1852 - accuracy: 0.4838 - val_loss: 1.3094 - val_accuracy: 0.4505
Epoch 82/100
20/20 [==============================] - 0s 3ms/step - loss: 1.1842 - accuracy: 0.4628 - val_loss: 1.3096 - val_accuracy: 0.4505
Epoch 83/100
20/20 [==============================] - 0s 2ms/step - loss: 1.1834 - accuracy: 0.4919 - val_loss: 1.3084 - val_accuracy: 0.4535
Epoch 84/100
20/20 [==============================] - 0s 2ms/step - loss: 1.1824 - accuracy: 0.4790 - val_loss: 1.3071 - val_accuracy: 0.4505
Epoch 85/100
20/20 [==============================] - 0s 2ms/step - loss: 1.1816 - accuracy: 0.4806 - val_loss: 1.3062 - val_accuracy: 0.4505
Epoch 86/100
20/20 [==============================] - 0s 2ms/step - loss: 1.1807 - accuracy: 0.4660 - val_loss: 1.3054 - val_accuracy: 0.4535
Epoch 87/100
20/20 [==============================] - 0s 2ms/step - loss: 1.1799 - accuracy: 0.4547 - val_loss: 1.3050 - val_accuracy: 0.4505
Epoch 88/100
20/20 [==============================] - 0s 2ms/step - loss: 1.1792 - accuracy: 0.4757 - val_loss: 1.3050 - val_accuracy: 0.4505
Epoch 89/100
20/20 [==============================] - 0s 4ms/step - loss: 1.1782 - accuracy: 0.4676 - val_loss: 1.3046 - val_accuracy: 0.4474
Epoch 90/100
20/20 [==============================] - 0s 2ms/step - loss: 1.1777 - accuracy: 0.4871 - val_loss: 1.3042 - val_accuracy: 0.4505
Epoch 91/100
20/20 [==============================] - 0s 2ms/step - loss: 1.1769 - accuracy: 0.4822 - val_loss: 1.3033 - val_accuracy: 0.4505
Epoch 92/100
20/20 [==============================] - 0s 2ms/step - loss: 1.1764 - accuracy: 0.4741 - val_loss: 1.3033 - val_accuracy: 0.4505
Epoch 93/100
20/20 [==============================] - 0s 2ms/step - loss: 1.1758 - accuracy: 0.4644 - val_loss: 1.3040 - val_accuracy: 0.4384
Epoch 94/100
20/20 [==============================] - 0s 2ms/step - loss: 1.1751 - accuracy: 0.4951 - val_loss: 1.3032 - val_accuracy: 0.4474
Epoch 95/100
20/20 [==============================] - 0s 2ms/step - loss: 1.1744 - accuracy: 0.4951 - val_loss: 1.3021 - val_accuracy: 0.4474
Epoch 96/100
20/20 [==============================] - 0s 2ms/step - loss: 1.1736 - accuracy: 0.4984 - val_loss: 1.3004 - val_accuracy: 0.4414
Epoch 97/100
20/20 [==============================] - 0s 2ms/step - loss: 1.1730 - accuracy: 0.4693 - val_loss: 1.3006 - val_accuracy: 0.4505
Epoch 98/100
20/20 [==============================] - 0s 2ms/step - loss: 1.1724 - accuracy: 0.4676 - val_loss: 1.3005 - val_accuracy: 0.4474
Epoch 99/100
20/20 [==============================] - 0s 2ms/step - loss: 1.1717 - accuracy: 0.4693 - val_loss: 1.3003 - val_accuracy: 0.4595
Epoch 100/100
20/20 [==============================] - 0s 2ms/step - loss: 1.1711 - accuracy: 0.4773 - val_loss: 1.3004 - val_accuracy: 0.4474
In [27]:
model.summary()
Model: "sequential"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 dense (Dense)               (None, 11)                132       
                                                                 
 dense_1 (Dense)             (None, 9)                 108       
                                                                 
 dense_2 (Dense)             (None, 9)                 90        
                                                                 
=================================================================
Total params: 330
Trainable params: 330
Non-trainable params: 0
_________________________________________________________________
In [28]:
eval_hist = model.evaluate(normalized_test_X, y_test)
13/13 [==============================] - 0s 707us/step - loss: 1.2283 - accuracy: 0.4951
In [29]:
y_pred = model.predict(normalized_test_X)
y_pred[0]
13/13 [==============================] - 0s 582us/step
Out[29]:
array([0.00573476, 0.00581845, 0.0066573 , 0.00368344, 0.03174327,
       0.40199715, 0.43749237, 0.09248103, 0.01439231], dtype=float32)

3.C.i.

In [30]:
# Training Loss (blue) and Validation Loss (red)
plt.plot(history.history['loss'], color='b', label="loss")
plt.plot(history.history['val_loss'], color='r', label="val_loss")

plt.show()

3.C.ii

In [31]:
# Training Accuracy (blue) and Validation Accuracy (red)
plt.plot(history.history['accuracy'], color='b', label="accuracy")
plt.plot(history.history['val_accuracy'], color='r', label="val_accuracy")

plt.show()

3.D.

In [32]:
# New Model

# Initialize Sequential model
model_2 = Sequential()
model_2.add(Dense(9, activation='relu'))
model_2.add(Dense(9, activation='sigmoid', kernel_initializer='uniform'))
model_2.add(Dense(9, activation='relu'))
model_2.add(Dense(9, activation='softmax'))
In [33]:
model_2.compile(loss="mse", metrics=["accuracy"], optimizer="sgd")
In [34]:
history = model_2.fit(x=normalized_train_X, y= y_train, epochs=100, validation_split=0.35)
Epoch 1/100
20/20 [==============================] - 0s 7ms/step - loss: 0.1143 - accuracy: 0.0000e+00 - val_loss: 0.1138 - val_accuracy: 0.0000e+00
Epoch 2/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1140 - accuracy: 0.0000e+00 - val_loss: 0.1135 - val_accuracy: 0.0000e+00
Epoch 3/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1136 - accuracy: 0.0000e+00 - val_loss: 0.1131 - val_accuracy: 0.0000e+00
Epoch 4/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1133 - accuracy: 0.0000e+00 - val_loss: 0.1128 - val_accuracy: 0.0000e+00
Epoch 5/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1129 - accuracy: 0.0000e+00 - val_loss: 0.1125 - val_accuracy: 0.0000e+00
Epoch 6/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1126 - accuracy: 0.0000e+00 - val_loss: 0.1122 - val_accuracy: 0.0000e+00
Epoch 7/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1123 - accuracy: 0.0000e+00 - val_loss: 0.1118 - val_accuracy: 0.0000e+00
Epoch 8/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1120 - accuracy: 0.0000e+00 - val_loss: 0.1115 - val_accuracy: 0.0000e+00
Epoch 9/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1116 - accuracy: 0.0000e+00 - val_loss: 0.1112 - val_accuracy: 0.0000e+00
Epoch 10/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1113 - accuracy: 0.0000e+00 - val_loss: 0.1109 - val_accuracy: 0.0000e+00
Epoch 11/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1110 - accuracy: 0.0000e+00 - val_loss: 0.1106 - val_accuracy: 0.0000e+00
Epoch 12/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1107 - accuracy: 0.0000e+00 - val_loss: 0.1103 - val_accuracy: 0.0000e+00
Epoch 13/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1104 - accuracy: 0.0000e+00 - val_loss: 0.1100 - val_accuracy: 0.0000e+00
Epoch 14/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1101 - accuracy: 0.0000e+00 - val_loss: 0.1097 - val_accuracy: 0.0000e+00
Epoch 15/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1098 - accuracy: 0.0000e+00 - val_loss: 0.1094 - val_accuracy: 0.0000e+00
Epoch 16/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1095 - accuracy: 0.0000e+00 - val_loss: 0.1091 - val_accuracy: 0.0000e+00
Epoch 17/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1092 - accuracy: 0.0000e+00 - val_loss: 0.1088 - val_accuracy: 0.0000e+00
Epoch 18/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1089 - accuracy: 0.0000e+00 - val_loss: 0.1085 - val_accuracy: 0.0000e+00
Epoch 19/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1086 - accuracy: 0.0000e+00 - val_loss: 0.1082 - val_accuracy: 0.0000e+00
Epoch 20/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1084 - accuracy: 0.0000e+00 - val_loss: 0.1080 - val_accuracy: 0.0000e+00
Epoch 21/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1081 - accuracy: 0.0000e+00 - val_loss: 0.1077 - val_accuracy: 0.0000e+00
Epoch 22/100
20/20 [==============================] - 0s 3ms/step - loss: 0.1078 - accuracy: 0.0000e+00 - val_loss: 0.1074 - val_accuracy: 0.0000e+00
Epoch 23/100
20/20 [==============================] - 0s 4ms/step - loss: 0.1075 - accuracy: 0.0000e+00 - val_loss: 0.1071 - val_accuracy: 0.0000e+00
Epoch 24/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1073 - accuracy: 0.0000e+00 - val_loss: 0.1069 - val_accuracy: 0.0000e+00
Epoch 25/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1070 - accuracy: 0.0000e+00 - val_loss: 0.1066 - val_accuracy: 0.0000e+00
Epoch 26/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1067 - accuracy: 0.0000e+00 - val_loss: 0.1063 - val_accuracy: 0.0000e+00
Epoch 27/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1065 - accuracy: 0.0000e+00 - val_loss: 0.1061 - val_accuracy: 0.0000e+00
Epoch 28/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1062 - accuracy: 0.0000e+00 - val_loss: 0.1058 - val_accuracy: 0.0000e+00
Epoch 29/100
20/20 [==============================] - 0s 3ms/step - loss: 0.1059 - accuracy: 0.0000e+00 - val_loss: 0.1056 - val_accuracy: 0.0000e+00
Epoch 30/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1057 - accuracy: 0.0000e+00 - val_loss: 0.1053 - val_accuracy: 0.0000e+00
Epoch 31/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1054 - accuracy: 0.0000e+00 - val_loss: 0.1050 - val_accuracy: 0.0000e+00
Epoch 32/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1052 - accuracy: 0.0000e+00 - val_loss: 0.1048 - val_accuracy: 0.0000e+00
Epoch 33/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1049 - accuracy: 0.0000e+00 - val_loss: 0.1045 - val_accuracy: 0.0000e+00
Epoch 34/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1047 - accuracy: 0.0000e+00 - val_loss: 0.1043 - val_accuracy: 0.0000e+00
Epoch 35/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1044 - accuracy: 0.0000e+00 - val_loss: 0.1041 - val_accuracy: 0.0000e+00
Epoch 36/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1042 - accuracy: 0.0000e+00 - val_loss: 0.1038 - val_accuracy: 0.0000e+00
Epoch 37/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1039 - accuracy: 0.0000e+00 - val_loss: 0.1036 - val_accuracy: 0.0000e+00
Epoch 38/100
20/20 [==============================] - 0s 3ms/step - loss: 0.1037 - accuracy: 0.0000e+00 - val_loss: 0.1033 - val_accuracy: 0.0000e+00
Epoch 39/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1034 - accuracy: 0.0000e+00 - val_loss: 0.1031 - val_accuracy: 0.0000e+00
Epoch 40/100
20/20 [==============================] - 0s 3ms/step - loss: 0.1032 - accuracy: 0.0000e+00 - val_loss: 0.1029 - val_accuracy: 0.0000e+00
Epoch 41/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1029 - accuracy: 0.0000e+00 - val_loss: 0.1026 - val_accuracy: 0.0000e+00
Epoch 42/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1027 - accuracy: 0.0000e+00 - val_loss: 0.1024 - val_accuracy: 0.0000e+00
Epoch 43/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1025 - accuracy: 0.0000e+00 - val_loss: 0.1022 - val_accuracy: 0.0000e+00
Epoch 44/100
20/20 [==============================] - 0s 3ms/step - loss: 0.1022 - accuracy: 0.0000e+00 - val_loss: 0.1019 - val_accuracy: 0.0000e+00
Epoch 45/100
20/20 [==============================] - 0s 3ms/step - loss: 0.1020 - accuracy: 0.0000e+00 - val_loss: 0.1017 - val_accuracy: 0.0000e+00
Epoch 46/100
20/20 [==============================] - 0s 3ms/step - loss: 0.1018 - accuracy: 0.0000e+00 - val_loss: 0.1015 - val_accuracy: 0.0000e+00
Epoch 47/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1015 - accuracy: 0.0000e+00 - val_loss: 0.1012 - val_accuracy: 0.0000e+00
Epoch 48/100
20/20 [==============================] - 0s 3ms/step - loss: 0.1013 - accuracy: 0.0000e+00 - val_loss: 0.1010 - val_accuracy: 0.0000e+00
Epoch 49/100
20/20 [==============================] - 0s 3ms/step - loss: 0.1011 - accuracy: 0.0000e+00 - val_loss: 0.1008 - val_accuracy: 0.0000e+00
Epoch 50/100
20/20 [==============================] - 0s 3ms/step - loss: 0.1008 - accuracy: 0.0000e+00 - val_loss: 0.1006 - val_accuracy: 0.0000e+00
Epoch 51/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1006 - accuracy: 0.0000e+00 - val_loss: 0.1004 - val_accuracy: 0.0000e+00
Epoch 52/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1004 - accuracy: 0.0000e+00 - val_loss: 0.1001 - val_accuracy: 0.0000e+00
Epoch 53/100
20/20 [==============================] - 0s 2ms/step - loss: 0.1002 - accuracy: 0.0000e+00 - val_loss: 0.0999 - val_accuracy: 0.0000e+00
Epoch 54/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0999 - accuracy: 0.0000e+00 - val_loss: 0.0997 - val_accuracy: 0.0000e+00
Epoch 55/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0997 - accuracy: 0.0097 - val_loss: 0.0995 - val_accuracy: 0.4264
Epoch 56/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0995 - accuracy: 0.4191 - val_loss: 0.0993 - val_accuracy: 0.4264
Epoch 57/100
20/20 [==============================] - 0s 3ms/step - loss: 0.0993 - accuracy: 0.4191 - val_loss: 0.0991 - val_accuracy: 0.4264
Epoch 58/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0991 - accuracy: 0.4191 - val_loss: 0.0988 - val_accuracy: 0.4264
Epoch 59/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0988 - accuracy: 0.4191 - val_loss: 0.0986 - val_accuracy: 0.4264
Epoch 60/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0986 - accuracy: 0.4191 - val_loss: 0.0984 - val_accuracy: 0.4264
Epoch 61/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0984 - accuracy: 0.4191 - val_loss: 0.0982 - val_accuracy: 0.4264
Epoch 62/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0982 - accuracy: 0.4191 - val_loss: 0.0980 - val_accuracy: 0.4264
Epoch 63/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0980 - accuracy: 0.4191 - val_loss: 0.0978 - val_accuracy: 0.4264
Epoch 64/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0978 - accuracy: 0.4191 - val_loss: 0.0976 - val_accuracy: 0.4264
Epoch 65/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0976 - accuracy: 0.4191 - val_loss: 0.0974 - val_accuracy: 0.4264
Epoch 66/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0974 - accuracy: 0.4191 - val_loss: 0.0972 - val_accuracy: 0.4264
Epoch 67/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0972 - accuracy: 0.4191 - val_loss: 0.0970 - val_accuracy: 0.4264
Epoch 68/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0969 - accuracy: 0.4191 - val_loss: 0.0968 - val_accuracy: 0.4264
Epoch 69/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0967 - accuracy: 0.4191 - val_loss: 0.0966 - val_accuracy: 0.4264
Epoch 70/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0965 - accuracy: 0.4191 - val_loss: 0.0964 - val_accuracy: 0.4264
Epoch 71/100
20/20 [==============================] - 0s 3ms/step - loss: 0.0963 - accuracy: 0.4191 - val_loss: 0.0962 - val_accuracy: 0.4264
Epoch 72/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0961 - accuracy: 0.4191 - val_loss: 0.0960 - val_accuracy: 0.4264
Epoch 73/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0959 - accuracy: 0.4191 - val_loss: 0.0958 - val_accuracy: 0.4264
Epoch 74/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0957 - accuracy: 0.4191 - val_loss: 0.0956 - val_accuracy: 0.4264
Epoch 75/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0955 - accuracy: 0.4191 - val_loss: 0.0954 - val_accuracy: 0.4264
Epoch 76/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0953 - accuracy: 0.4191 - val_loss: 0.0952 - val_accuracy: 0.4264
Epoch 77/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0951 - accuracy: 0.4191 - val_loss: 0.0950 - val_accuracy: 0.4264
Epoch 78/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0949 - accuracy: 0.4191 - val_loss: 0.0948 - val_accuracy: 0.4264
Epoch 79/100
20/20 [==============================] - 0s 3ms/step - loss: 0.0947 - accuracy: 0.4191 - val_loss: 0.0946 - val_accuracy: 0.4264
Epoch 80/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0945 - accuracy: 0.4191 - val_loss: 0.0945 - val_accuracy: 0.4264
Epoch 81/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0943 - accuracy: 0.4191 - val_loss: 0.0943 - val_accuracy: 0.4264
Epoch 82/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0941 - accuracy: 0.4191 - val_loss: 0.0941 - val_accuracy: 0.4264
Epoch 83/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0939 - accuracy: 0.4191 - val_loss: 0.0939 - val_accuracy: 0.4264
Epoch 84/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0938 - accuracy: 0.4191 - val_loss: 0.0937 - val_accuracy: 0.4264
Epoch 85/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0936 - accuracy: 0.4191 - val_loss: 0.0935 - val_accuracy: 0.4264
Epoch 86/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0934 - accuracy: 0.4191 - val_loss: 0.0934 - val_accuracy: 0.4264
Epoch 87/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0932 - accuracy: 0.4191 - val_loss: 0.0932 - val_accuracy: 0.4264
Epoch 88/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0930 - accuracy: 0.4191 - val_loss: 0.0930 - val_accuracy: 0.4264
Epoch 89/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0928 - accuracy: 0.4191 - val_loss: 0.0928 - val_accuracy: 0.4264
Epoch 90/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0926 - accuracy: 0.4191 - val_loss: 0.0926 - val_accuracy: 0.4264
Epoch 91/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0924 - accuracy: 0.4191 - val_loss: 0.0925 - val_accuracy: 0.4264
Epoch 92/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0923 - accuracy: 0.4191 - val_loss: 0.0923 - val_accuracy: 0.4264
Epoch 93/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0921 - accuracy: 0.4191 - val_loss: 0.0921 - val_accuracy: 0.4264
Epoch 94/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0919 - accuracy: 0.4191 - val_loss: 0.0919 - val_accuracy: 0.4264
Epoch 95/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0917 - accuracy: 0.4191 - val_loss: 0.0918 - val_accuracy: 0.4264
Epoch 96/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0915 - accuracy: 0.4191 - val_loss: 0.0916 - val_accuracy: 0.4264
Epoch 97/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0913 - accuracy: 0.4191 - val_loss: 0.0914 - val_accuracy: 0.4264
Epoch 98/100
20/20 [==============================] - 0s 2ms/step - loss: 0.0912 - accuracy: 0.4191 - val_loss: 0.0912 - val_accuracy: 0.4264
Epoch 99/100
20/20 [==============================] - 0s 3ms/step - loss: 0.0910 - accuracy: 0.4191 - val_loss: 0.0911 - val_accuracy: 0.4264
Epoch 100/100
20/20 [==============================] - 0s 3ms/step - loss: 0.0908 - accuracy: 0.4191 - val_loss: 0.0909 - val_accuracy: 0.4264
In [35]:
model_2.summary()
Model: "sequential_1"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 dense_3 (Dense)             (None, 9)                 108       
                                                                 
 dense_4 (Dense)             (None, 9)                 90        
                                                                 
 dense_5 (Dense)             (None, 9)                 90        
                                                                 
 dense_6 (Dense)             (None, 9)                 90        
                                                                 
=================================================================
Total params: 378
Trainable params: 378
Non-trainable params: 0
_________________________________________________________________

3.E.

In [36]:
# Training Loss (blue) and Validation Loss (red)
plt.plot(history.history['loss'], color='b', label="loss")
plt.plot(history.history['val_loss'], color='r', label="val_loss")

plt.show()
In [37]:
# Training Accuracy (blue) and Validation Accuracy (red)
plt.plot(history.history['accuracy'], color='b', label="accuracy")
plt.plot(history.history['val_accuracy'], color='r', label="val_accuracy")

plt.show()

For model_2 we added

  • sigmoid activation
  • also the loss function used is mse
  • From the 2 graph we can see that the first model with Training Loss and Validation Loss as curve flattening towards right with increasing number of epochs.

  • Also the valudation accuracy becomes constant around 0.4 for both the models.

Part B¶

1.A.

In [38]:
import h5py
In [39]:
h5f = h5py.File(folderPath + '/Autonomous_Vehicles_SVHN_single_grey1.h5', 'r')

1.B.

In [40]:
h5f.keys()
Out[40]:
<KeysViewHDF5 ['X_test', 'X_train', 'X_val', 'y_test', 'y_train', 'y_val']>

1.C.

In [41]:
X_test = h5f['X_test'][:]
X_train = h5f['X_train'][:]
X_val = h5f['X_val'][:]
y_test = h5f['y_test'][:]
y_train = h5f['y_train'][:]
y_val = h5f['y_val'][:]
In [42]:
h5f.close()
In [43]:
len(X_train), len(X_test), X_train.shape, X_test.shape
Out[43]:
(42000, 18000, (42000, 32, 32), (18000, 32, 32))
  • there are 42000 training data.
  • there are 18000 test data.

2.A.

In [44]:
print(X_train.shape)
print(X_test.shape)
print(X_val.shape)
print(y_test.shape)
print(y_train.shape)
print(y_val.shape)
(42000, 32, 32)
(18000, 32, 32)
(60000, 32, 32)
(18000,)
(42000,)
(60000,)

2.B.

In [45]:
# visualizing the first 10 images in the dataset and their labels

for i in range(10):
    plt.subplot(1, 10, i+1)
    plt.imshow(X_train[i])
    plt.axis('off')
    plt.show()
    print(y_train[i: i+1])
[2]
[6]
[7]
[4]
[4]
[0]
[3]
[0]
[7]
[3]

2.C.

In [46]:
# Shape of the images and the first image

print("Shape:", X_train[0].shape)
print("\n \n")
print("First image:\n", X_train[0])
Shape: (32, 32)

 

First image:
 [[ 33.0704  30.2601  26.852  ...  71.4471  58.2204  42.9939]
 [ 25.2283  25.5533  29.9765 ... 113.0209 103.3639  84.2949]
 [ 26.2775  22.6137  40.4763 ... 113.3028 121.775  115.4228]
 ...
 [ 28.5502  36.212   45.0801 ...  24.1359  25.0927  26.0603]
 [ 38.4352  26.4733  23.2717 ...  28.1094  29.4683  30.0661]
 [ 50.2984  26.0773  24.0389 ...  49.6682  50.853   53.0377]]
In [47]:
X_train = X_train.reshape(X_train.shape[0], 1024)
X_test = X_test.reshape(X_test.shape[0], 1024)
X_val = X_val.reshape(X_val.shape[0], 1024)

2.D.

In [48]:
# Normalise the pixel values

X_train = X_train/255
X_test = X_test/255
X_val = X_val/255
In [49]:
print('Training set:', X_train.shape, y_train.shape)
print('Test set:', X_test.shape, y_test.shape)
print('Test set:', X_val, X_val)
Training set: (42000, 1024) (42000,)
Test set: (18000, 1024) (18000,)
Test set: [[0.17372157 0.18039177 0.20129648 ... 0.0840902  0.08164353 0.07874981]
 [0.2595059  0.25991058 0.26496157 ... 0.27973488 0.37018237 0.44869727]
 [0.4323859  0.41972825 0.41143882 ... 0.40005568 0.3922133  0.38829216]
 ...
 [0.16611569 0.1610647  0.16130039 ... 0.13170119 0.13796666 0.13796666]
 [0.850962   0.84704083 0.83527726 ... 0.7012145  0.7012145  0.689898  ]
 [0.45171645 0.44779527 0.45171645 ... 0.45866472 0.46258587 0.46371567]] [[0.17372157 0.18039177 0.20129648 ... 0.0840902  0.08164353 0.07874981]
 [0.2595059  0.25991058 0.26496157 ... 0.27973488 0.37018237 0.44869727]
 [0.4323859  0.41972825 0.41143882 ... 0.40005568 0.3922133  0.38829216]
 ...
 [0.16611569 0.1610647  0.16130039 ... 0.13170119 0.13796666 0.13796666]
 [0.850962   0.84704083 0.83527726 ... 0.7012145  0.7012145  0.689898  ]
 [0.45171645 0.44779527 0.45171645 ... 0.45866472 0.46258587 0.46371567]]

2.E.

In [50]:
y_train = tf.keras.utils.to_categorical(y_train)
y_test = tf.keras.utils.to_categorical(y_test)
In [51]:
y_train
Out[51]:
array([[0., 0., 1., ..., 0., 0., 0.],
       [0., 0., 0., ..., 0., 0., 0.],
       [0., 0., 0., ..., 1., 0., 0.],
       ...,
       [0., 0., 0., ..., 1., 0., 0.],
       [1., 0., 0., ..., 0., 0., 0.],
       [0., 0., 0., ..., 0., 0., 0.]], dtype=float32)

2.F.

In [52]:
# number of classes
classes_count = y_test.shape[1] 
print("Number of classes:", classes_count)
Number of classes: 10

3.A.

In [53]:
def nn_model_rrrrs():
    # create model
    model = Sequential()  
    model.add(Flatten())
    model.add(Dense(256, activation='relu'))
    model.add(Dense(64, activation='relu'))
    model.add(Dense(64, activation='relu'))
    model.add(Dense(32, activation='relu'))
    model.add(Dense(classes_count, activation='softmax'))
    return model
In [54]:
model = nn_model_rrrrs()
sgd = optimizers.Adam(lr=1e-3)

### Loss function = Categorical cross entropy
model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
c:\ProgramData\Anaconda3\lib\site-packages\keras\optimizers\optimizer_v2\adam.py:114: UserWarning: The `lr` argument is deprecated, use `learning_rate` instead.
  super().__init__(name, **kwargs)

3.B.

In [55]:
model_training_history = model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=100, batch_size=300, verbose=2)
Epoch 1/100
140/140 - 1s - loss: 2.3054 - accuracy: 0.1043 - val_loss: 2.2985 - val_accuracy: 0.1108 - 1s/epoch - 9ms/step
Epoch 2/100
140/140 - 1s - loss: 2.1779 - accuracy: 0.1891 - val_loss: 1.9674 - val_accuracy: 0.2739 - 661ms/epoch - 5ms/step
Epoch 3/100
140/140 - 1s - loss: 1.7213 - accuracy: 0.3879 - val_loss: 1.5781 - val_accuracy: 0.4509 - 603ms/epoch - 4ms/step
Epoch 4/100
140/140 - 1s - loss: 1.5308 - accuracy: 0.4781 - val_loss: 1.4207 - val_accuracy: 0.5271 - 657ms/epoch - 5ms/step
Epoch 5/100
140/140 - 1s - loss: 1.3691 - accuracy: 0.5480 - val_loss: 1.3186 - val_accuracy: 0.5646 - 616ms/epoch - 4ms/step
Epoch 6/100
140/140 - 1s - loss: 1.2634 - accuracy: 0.5890 - val_loss: 1.2098 - val_accuracy: 0.6064 - 612ms/epoch - 4ms/step
Epoch 7/100
140/140 - 1s - loss: 1.2065 - accuracy: 0.6100 - val_loss: 1.2249 - val_accuracy: 0.6001 - 908ms/epoch - 6ms/step
Epoch 8/100
140/140 - 1s - loss: 1.1592 - accuracy: 0.6297 - val_loss: 1.1519 - val_accuracy: 0.6335 - 703ms/epoch - 5ms/step
Epoch 9/100
140/140 - 1s - loss: 1.1156 - accuracy: 0.6484 - val_loss: 1.0806 - val_accuracy: 0.6636 - 668ms/epoch - 5ms/step
Epoch 10/100
140/140 - 1s - loss: 1.0730 - accuracy: 0.6668 - val_loss: 1.0332 - val_accuracy: 0.6806 - 664ms/epoch - 5ms/step
Epoch 11/100
140/140 - 1s - loss: 1.0362 - accuracy: 0.6782 - val_loss: 1.0039 - val_accuracy: 0.6926 - 726ms/epoch - 5ms/step
Epoch 12/100
140/140 - 1s - loss: 0.9981 - accuracy: 0.6900 - val_loss: 1.0063 - val_accuracy: 0.6862 - 688ms/epoch - 5ms/step
Epoch 13/100
140/140 - 1s - loss: 0.9745 - accuracy: 0.6969 - val_loss: 1.0185 - val_accuracy: 0.6896 - 757ms/epoch - 5ms/step
Epoch 14/100
140/140 - 1s - loss: 0.9472 - accuracy: 0.7064 - val_loss: 0.9971 - val_accuracy: 0.6917 - 763ms/epoch - 5ms/step
Epoch 15/100
140/140 - 1s - loss: 0.9176 - accuracy: 0.7174 - val_loss: 0.9570 - val_accuracy: 0.7008 - 724ms/epoch - 5ms/step
Epoch 16/100
140/140 - 1s - loss: 0.8974 - accuracy: 0.7229 - val_loss: 0.9402 - val_accuracy: 0.7130 - 608ms/epoch - 4ms/step
Epoch 17/100
140/140 - 1s - loss: 0.8878 - accuracy: 0.7271 - val_loss: 0.9305 - val_accuracy: 0.7121 - 608ms/epoch - 4ms/step
Epoch 18/100
140/140 - 1s - loss: 0.8582 - accuracy: 0.7353 - val_loss: 0.8906 - val_accuracy: 0.7259 - 798ms/epoch - 6ms/step
Epoch 19/100
140/140 - 1s - loss: 0.8450 - accuracy: 0.7412 - val_loss: 0.8778 - val_accuracy: 0.7298 - 586ms/epoch - 4ms/step
Epoch 20/100
140/140 - 1s - loss: 0.8273 - accuracy: 0.7457 - val_loss: 0.8740 - val_accuracy: 0.7329 - 781ms/epoch - 6ms/step
Epoch 21/100
140/140 - 1s - loss: 0.8017 - accuracy: 0.7540 - val_loss: 0.8619 - val_accuracy: 0.7371 - 624ms/epoch - 4ms/step
Epoch 22/100
140/140 - 1s - loss: 0.8036 - accuracy: 0.7529 - val_loss: 0.8246 - val_accuracy: 0.7551 - 574ms/epoch - 4ms/step
Epoch 23/100
140/140 - 1s - loss: 0.7715 - accuracy: 0.7621 - val_loss: 0.8154 - val_accuracy: 0.7584 - 580ms/epoch - 4ms/step
Epoch 24/100
140/140 - 1s - loss: 0.7611 - accuracy: 0.7654 - val_loss: 0.8239 - val_accuracy: 0.7549 - 563ms/epoch - 4ms/step
Epoch 25/100
140/140 - 1s - loss: 0.7487 - accuracy: 0.7695 - val_loss: 0.7920 - val_accuracy: 0.7641 - 592ms/epoch - 4ms/step
Epoch 26/100
140/140 - 1s - loss: 0.7352 - accuracy: 0.7739 - val_loss: 0.8327 - val_accuracy: 0.7453 - 591ms/epoch - 4ms/step
Epoch 27/100
140/140 - 1s - loss: 0.7263 - accuracy: 0.7762 - val_loss: 0.7720 - val_accuracy: 0.7724 - 575ms/epoch - 4ms/step
Epoch 28/100
140/140 - 1s - loss: 0.7186 - accuracy: 0.7792 - val_loss: 0.7999 - val_accuracy: 0.7622 - 564ms/epoch - 4ms/step
Epoch 29/100
140/140 - 1s - loss: 0.7146 - accuracy: 0.7822 - val_loss: 0.7997 - val_accuracy: 0.7623 - 536ms/epoch - 4ms/step
Epoch 30/100
140/140 - 1s - loss: 0.6923 - accuracy: 0.7880 - val_loss: 0.7796 - val_accuracy: 0.7699 - 630ms/epoch - 4ms/step
Epoch 31/100
140/140 - 1s - loss: 0.6828 - accuracy: 0.7916 - val_loss: 0.7827 - val_accuracy: 0.7660 - 667ms/epoch - 5ms/step
Epoch 32/100
140/140 - 1s - loss: 0.6736 - accuracy: 0.7938 - val_loss: 0.7351 - val_accuracy: 0.7845 - 573ms/epoch - 4ms/step
Epoch 33/100
140/140 - 1s - loss: 0.6683 - accuracy: 0.7961 - val_loss: 0.7651 - val_accuracy: 0.7689 - 582ms/epoch - 4ms/step
Epoch 34/100
140/140 - 1s - loss: 0.6549 - accuracy: 0.7980 - val_loss: 0.7523 - val_accuracy: 0.7743 - 575ms/epoch - 4ms/step
Epoch 35/100
140/140 - 1s - loss: 0.6508 - accuracy: 0.8013 - val_loss: 0.7533 - val_accuracy: 0.7779 - 647ms/epoch - 5ms/step
Epoch 36/100
140/140 - 1s - loss: 0.6575 - accuracy: 0.7987 - val_loss: 0.7647 - val_accuracy: 0.7719 - 646ms/epoch - 5ms/step
Epoch 37/100
140/140 - 1s - loss: 0.6454 - accuracy: 0.8023 - val_loss: 0.7367 - val_accuracy: 0.7838 - 623ms/epoch - 4ms/step
Epoch 38/100
140/140 - 1s - loss: 0.6349 - accuracy: 0.8052 - val_loss: 0.6971 - val_accuracy: 0.7952 - 651ms/epoch - 5ms/step
Epoch 39/100
140/140 - 1s - loss: 0.6175 - accuracy: 0.8104 - val_loss: 0.7288 - val_accuracy: 0.7876 - 585ms/epoch - 4ms/step
Epoch 40/100
140/140 - 1s - loss: 0.6225 - accuracy: 0.8085 - val_loss: 0.7387 - val_accuracy: 0.7821 - 584ms/epoch - 4ms/step
Epoch 41/100
140/140 - 1s - loss: 0.6128 - accuracy: 0.8110 - val_loss: 0.7222 - val_accuracy: 0.7847 - 610ms/epoch - 4ms/step
Epoch 42/100
140/140 - 1s - loss: 0.6163 - accuracy: 0.8105 - val_loss: 0.6878 - val_accuracy: 0.8013 - 563ms/epoch - 4ms/step
Epoch 43/100
140/140 - 1s - loss: 0.5953 - accuracy: 0.8191 - val_loss: 0.6882 - val_accuracy: 0.7981 - 541ms/epoch - 4ms/step
Epoch 44/100
140/140 - 1s - loss: 0.6024 - accuracy: 0.8152 - val_loss: 0.7291 - val_accuracy: 0.7856 - 596ms/epoch - 4ms/step
Epoch 45/100
140/140 - 1s - loss: 0.5978 - accuracy: 0.8161 - val_loss: 0.7068 - val_accuracy: 0.7934 - 643ms/epoch - 5ms/step
Epoch 46/100
140/140 - 1s - loss: 0.5765 - accuracy: 0.8238 - val_loss: 0.6842 - val_accuracy: 0.7986 - 732ms/epoch - 5ms/step
Epoch 47/100
140/140 - 1s - loss: 0.5731 - accuracy: 0.8235 - val_loss: 0.6884 - val_accuracy: 0.8006 - 615ms/epoch - 4ms/step
Epoch 48/100
140/140 - 1s - loss: 0.5844 - accuracy: 0.8199 - val_loss: 0.7294 - val_accuracy: 0.7865 - 642ms/epoch - 5ms/step
Epoch 49/100
140/140 - 1s - loss: 0.5858 - accuracy: 0.8191 - val_loss: 0.6983 - val_accuracy: 0.7933 - 532ms/epoch - 4ms/step
Epoch 50/100
140/140 - 1s - loss: 0.5731 - accuracy: 0.8250 - val_loss: 0.6930 - val_accuracy: 0.8004 - 578ms/epoch - 4ms/step
Epoch 51/100
140/140 - 1s - loss: 0.5565 - accuracy: 0.8291 - val_loss: 0.7115 - val_accuracy: 0.7928 - 632ms/epoch - 5ms/step
Epoch 52/100
140/140 - 1s - loss: 0.5636 - accuracy: 0.8273 - val_loss: 0.7134 - val_accuracy: 0.7880 - 557ms/epoch - 4ms/step
Epoch 53/100
140/140 - 1s - loss: 0.5565 - accuracy: 0.8293 - val_loss: 0.7343 - val_accuracy: 0.7829 - 548ms/epoch - 4ms/step
Epoch 54/100
140/140 - 1s - loss: 0.5509 - accuracy: 0.8292 - val_loss: 0.6829 - val_accuracy: 0.8005 - 650ms/epoch - 5ms/step
Epoch 55/100
140/140 - 1s - loss: 0.5333 - accuracy: 0.8367 - val_loss: 0.6784 - val_accuracy: 0.8037 - 591ms/epoch - 4ms/step
Epoch 56/100
140/140 - 1s - loss: 0.5500 - accuracy: 0.8310 - val_loss: 0.7025 - val_accuracy: 0.7938 - 690ms/epoch - 5ms/step
Epoch 57/100
140/140 - 1s - loss: 0.5486 - accuracy: 0.8299 - val_loss: 0.7034 - val_accuracy: 0.7922 - 622ms/epoch - 4ms/step
Epoch 58/100
140/140 - 1s - loss: 0.5258 - accuracy: 0.8375 - val_loss: 0.6618 - val_accuracy: 0.8071 - 663ms/epoch - 5ms/step
Epoch 59/100
140/140 - 1s - loss: 0.5242 - accuracy: 0.8392 - val_loss: 0.6896 - val_accuracy: 0.7986 - 553ms/epoch - 4ms/step
Epoch 60/100
140/140 - 1s - loss: 0.5268 - accuracy: 0.8373 - val_loss: 0.6936 - val_accuracy: 0.7984 - 553ms/epoch - 4ms/step
Epoch 61/100
140/140 - 1s - loss: 0.5340 - accuracy: 0.8353 - val_loss: 0.6721 - val_accuracy: 0.8062 - 636ms/epoch - 5ms/step
Epoch 62/100
140/140 - 1s - loss: 0.5198 - accuracy: 0.8410 - val_loss: 0.6652 - val_accuracy: 0.8056 - 630ms/epoch - 4ms/step
Epoch 63/100
140/140 - 1s - loss: 0.5109 - accuracy: 0.8428 - val_loss: 0.6790 - val_accuracy: 0.8034 - 627ms/epoch - 4ms/step
Epoch 64/100
140/140 - 1s - loss: 0.5206 - accuracy: 0.8394 - val_loss: 0.6739 - val_accuracy: 0.8006 - 742ms/epoch - 5ms/step
Epoch 65/100
140/140 - 1s - loss: 0.5169 - accuracy: 0.8409 - val_loss: 0.6667 - val_accuracy: 0.8077 - 677ms/epoch - 5ms/step
Epoch 66/100
140/140 - 1s - loss: 0.5117 - accuracy: 0.8404 - val_loss: 0.6851 - val_accuracy: 0.8026 - 738ms/epoch - 5ms/step
Epoch 67/100
140/140 - 1s - loss: 0.5052 - accuracy: 0.8444 - val_loss: 0.6984 - val_accuracy: 0.7961 - 636ms/epoch - 5ms/step
Epoch 68/100
140/140 - 1s - loss: 0.5134 - accuracy: 0.8403 - val_loss: 0.6857 - val_accuracy: 0.8044 - 638ms/epoch - 5ms/step
Epoch 69/100
140/140 - 1s - loss: 0.4984 - accuracy: 0.8450 - val_loss: 0.6946 - val_accuracy: 0.7982 - 737ms/epoch - 5ms/step
Epoch 70/100
140/140 - 1s - loss: 0.4947 - accuracy: 0.8472 - val_loss: 0.6648 - val_accuracy: 0.8061 - 643ms/epoch - 5ms/step
Epoch 71/100
140/140 - 1s - loss: 0.4875 - accuracy: 0.8482 - val_loss: 0.6758 - val_accuracy: 0.8031 - 686ms/epoch - 5ms/step
Epoch 72/100
140/140 - 1s - loss: 0.4895 - accuracy: 0.8483 - val_loss: 0.6824 - val_accuracy: 0.8066 - 676ms/epoch - 5ms/step
Epoch 73/100
140/140 - 1s - loss: 0.4894 - accuracy: 0.8480 - val_loss: 0.6786 - val_accuracy: 0.8061 - 740ms/epoch - 5ms/step
Epoch 74/100
140/140 - 1s - loss: 0.4886 - accuracy: 0.8487 - val_loss: 0.6750 - val_accuracy: 0.8044 - 743ms/epoch - 5ms/step
Epoch 75/100
140/140 - 1s - loss: 0.4810 - accuracy: 0.8497 - val_loss: 0.6483 - val_accuracy: 0.8147 - 748ms/epoch - 5ms/step
Epoch 76/100
140/140 - 1s - loss: 0.4799 - accuracy: 0.8503 - val_loss: 0.7065 - val_accuracy: 0.7953 - 683ms/epoch - 5ms/step
Epoch 77/100
140/140 - 1s - loss: 0.4957 - accuracy: 0.8449 - val_loss: 0.6888 - val_accuracy: 0.8034 - 670ms/epoch - 5ms/step
Epoch 78/100
140/140 - 1s - loss: 0.4784 - accuracy: 0.8514 - val_loss: 0.6470 - val_accuracy: 0.8151 - 599ms/epoch - 4ms/step
Epoch 79/100
140/140 - 1s - loss: 0.4717 - accuracy: 0.8524 - val_loss: 0.6651 - val_accuracy: 0.8113 - 546ms/epoch - 4ms/step
Epoch 80/100
140/140 - 1s - loss: 0.4674 - accuracy: 0.8551 - val_loss: 0.7171 - val_accuracy: 0.7978 - 561ms/epoch - 4ms/step
Epoch 81/100
140/140 - 1s - loss: 0.4625 - accuracy: 0.8554 - val_loss: 0.6752 - val_accuracy: 0.8066 - 590ms/epoch - 4ms/step
Epoch 82/100
140/140 - 1s - loss: 0.4647 - accuracy: 0.8556 - val_loss: 0.6618 - val_accuracy: 0.8120 - 553ms/epoch - 4ms/step
Epoch 83/100
140/140 - 1s - loss: 0.4602 - accuracy: 0.8570 - val_loss: 0.6709 - val_accuracy: 0.8094 - 534ms/epoch - 4ms/step
Epoch 84/100
140/140 - 1s - loss: 0.4654 - accuracy: 0.8535 - val_loss: 0.7048 - val_accuracy: 0.8013 - 536ms/epoch - 4ms/step
Epoch 85/100
140/140 - 1s - loss: 0.4478 - accuracy: 0.8591 - val_loss: 0.6812 - val_accuracy: 0.8095 - 576ms/epoch - 4ms/step
Epoch 86/100
140/140 - 1s - loss: 0.4579 - accuracy: 0.8571 - val_loss: 0.6685 - val_accuracy: 0.8123 - 685ms/epoch - 5ms/step
Epoch 87/100
140/140 - 1s - loss: 0.4468 - accuracy: 0.8612 - val_loss: 0.7063 - val_accuracy: 0.8024 - 610ms/epoch - 4ms/step
Epoch 88/100
140/140 - 1s - loss: 0.4426 - accuracy: 0.8624 - val_loss: 0.6817 - val_accuracy: 0.8067 - 658ms/epoch - 5ms/step
Epoch 89/100
140/140 - 1s - loss: 0.4461 - accuracy: 0.8598 - val_loss: 0.6884 - val_accuracy: 0.8068 - 630ms/epoch - 4ms/step
Epoch 90/100
140/140 - 1s - loss: 0.4606 - accuracy: 0.8556 - val_loss: 0.7069 - val_accuracy: 0.7994 - 572ms/epoch - 4ms/step
Epoch 91/100
140/140 - 1s - loss: 0.4451 - accuracy: 0.8582 - val_loss: 0.6639 - val_accuracy: 0.8126 - 604ms/epoch - 4ms/step
Epoch 92/100
140/140 - 1s - loss: 0.4411 - accuracy: 0.8606 - val_loss: 0.6776 - val_accuracy: 0.8099 - 585ms/epoch - 4ms/step
Epoch 93/100
140/140 - 1s - loss: 0.4326 - accuracy: 0.8659 - val_loss: 0.6792 - val_accuracy: 0.8116 - 550ms/epoch - 4ms/step
Epoch 94/100
140/140 - 1s - loss: 0.4267 - accuracy: 0.8653 - val_loss: 0.6814 - val_accuracy: 0.8142 - 588ms/epoch - 4ms/step
Epoch 95/100
140/140 - 1s - loss: 0.4279 - accuracy: 0.8659 - val_loss: 0.6852 - val_accuracy: 0.8091 - 603ms/epoch - 4ms/step
Epoch 96/100
140/140 - 1s - loss: 0.4246 - accuracy: 0.8656 - val_loss: 0.6600 - val_accuracy: 0.8165 - 566ms/epoch - 4ms/step
Epoch 97/100
140/140 - 1s - loss: 0.4283 - accuracy: 0.8643 - val_loss: 0.7319 - val_accuracy: 0.7948 - 599ms/epoch - 4ms/step
Epoch 98/100
140/140 - 1s - loss: 0.4274 - accuracy: 0.8654 - val_loss: 0.7255 - val_accuracy: 0.8009 - 561ms/epoch - 4ms/step
Epoch 99/100
140/140 - 1s - loss: 0.4282 - accuracy: 0.8632 - val_loss: 0.6697 - val_accuracy: 0.8156 - 601ms/epoch - 4ms/step
Epoch 100/100
140/140 - 1s - loss: 0.4283 - accuracy: 0.8662 - val_loss: 0.7055 - val_accuracy: 0.8036 - 545ms/epoch - 4ms/step

3.C.

In [56]:
scores = model.evaluate(X_test, y_test, verbose=0)
print("Loss:", scores[0])
print("Accuracy:", scores[1])
Loss: 0.7054815292358398
Accuracy: 0.8035555481910706

3.D.

In [57]:
accuracy      = model_training_history.history['accuracy']
val_accuracy  = model_training_history.history['val_accuracy']
loss     = model_training_history.history['loss']
val_loss = model_training_history.history['val_loss']

epochs   = range(len(accuracy))
plt.plot  ( epochs, accuracy, label = 'training accuracy' )
plt.plot  ( epochs, val_accuracy, label = 'validation accuracy' )
plt.title ('Training and validation accuracy')
plt.legend(loc = 'lower right')
plt.figure()

plt.plot  ( epochs, loss, label = 'training loss' )
plt.plot  ( epochs, val_loss, label = 'validation loss' )
plt.legend(loc = 'upper right')
plt.title ('Training and validation loss'   )
Out[57]:
Text(0.5, 1.0, 'Training and validation loss')
  • Training could achieve an accuracy and loss of :
    • Accuracy: 0.8089444637298584
    • Loss: 0.6811097264289856
  • There are 10 classes in the dataset